Blow away all directories/files on each build
authorAlex Crichton <alex@alexcrichton.com>
Fri, 18 Jul 2014 02:53:47 +0000 (19:53 -0700)
committerAlex Crichton <alex@alexcrichton.com>
Fri, 18 Jul 2014 22:56:16 +0000 (15:56 -0700)
And afterwards selectively move them back into place if they're fresh. This
prevents stale files from showing up from old builds.

This currently breaks anyone using `build=` scripts; the fix is coming in the
next commit.

Closes #205

src/cargo/ops/cargo_rustc/context.rs
src/cargo/ops/cargo_rustc/fingerprint.rs
src/cargo/ops/cargo_rustc/job_queue.rs
src/cargo/ops/cargo_rustc/layout.rs [new file with mode: 0644]
src/cargo/ops/cargo_rustc/mod.rs
tests/test_cargo_compile.rs
tests/test_cargo_compile_path_deps.rs

index d6b43371d2d0e8dce70fcb95bba2aefa7a2ca53e..471cd6928d024027dcc4ec8ccaa2b3899d87dd3a 100644 (file)
@@ -1,5 +1,3 @@
-use std::io::IoError;
-use std::io;
 use std::str;
 use std::collections::{HashMap, HashSet};
 
@@ -7,6 +5,8 @@ use core::{Package, PackageId, PackageSet, Resolve, Target};
 use util;
 use util::{CargoResult, ChainError, internal, Config};
 
+use super::layout::{Layout, LayoutProxy};
+
 #[deriving(Show)]
 pub enum PlatformRequirement {
     Target,
@@ -20,10 +20,9 @@ pub struct Context<'a, 'b> {
     pub config: &'b mut Config<'b>,
     pub resolve: &'a Resolve,
 
-    dest: Path,
-    host_dest: Path,
-    deps_dir: Path,
-    host_deps_dir: Path,
+    env: &'a str,
+    host: Layout,
+    target: Option<Layout>,
     host_dylib: (String, String),
     package_set: &'a PackageSet,
     target_dylib: (String, String),
@@ -31,10 +30,9 @@ pub struct Context<'a, 'b> {
 }
 
 impl<'a, 'b> Context<'a, 'b> {
-    pub fn new(resolve: &'a Resolve, deps: &'a PackageSet,
+    pub fn new(env: &'a str, resolve: &'a Resolve, deps: &'a PackageSet,
                config: &'b mut Config<'b>,
-               dest: Path, deps_dir: Path,
-               host_dest: Path, host_deps_dir: Path)
+               host: Layout, target: Option<Layout>)
                -> CargoResult<Context<'a, 'b>> {
         let target_dylib = try!(Context::dylib_parts(config.target()));
         let host_dylib = if config.target().is_none() {
@@ -44,8 +42,9 @@ impl<'a, 'b> Context<'a, 'b> {
         };
         Ok(Context {
             rustc_version: try!(Context::rustc_version()),
-            dest: dest,
-            deps_dir: deps_dir,
+            env: env,
+            host: host,
+            target: target,
             primary: false,
             resolve: resolve,
             package_set: deps,
@@ -53,8 +52,6 @@ impl<'a, 'b> Context<'a, 'b> {
             target_dylib: target_dylib,
             host_dylib: host_dylib,
             requirements: HashMap::new(),
-            host_dest: host_dest,
-            host_deps_dir: host_deps_dir,
         })
     }
 
@@ -89,22 +86,19 @@ impl<'a, 'b> Context<'a, 'b> {
     /// Prepare this context, ensuring that all filesystem directories are in
     /// place.
     pub fn prepare(&mut self, pkg: &'a Package) -> CargoResult<()> {
-        debug!("creating target dir; path={}", self.dest.display());
-
-        try!(self.mk_target(&self.dest).chain_error(||
-            internal(format!("Couldn't create the target directory for {} at {}",
-                     pkg.get_name(), self.dest.display()))));
-        try!(self.mk_target(&self.host_dest).chain_error(||
-            internal(format!("Couldn't create the host directory for {} at {}",
-                     pkg.get_name(), self.dest.display()))));
-
-        try!(self.mk_target(&self.deps_dir).chain_error(||
-            internal(format!("Couldn't create the directory for dependencies for {} at {}",
-                     pkg.get_name(), self.deps_dir.display()))));
-
-        try!(self.mk_target(&self.host_deps_dir).chain_error(||
-            internal(format!("Couldn't create the directory for dependencies for {} at {}",
-                     pkg.get_name(), self.deps_dir.display()))));
+        try!(self.host.prepare().chain_error(|| {
+            internal(format!("couldn't prepare build directories for `{}`",
+                             pkg.get_name()))
+        }));
+        match self.target {
+            Some(ref mut target) => {
+                try!(target.prepare().chain_error(|| {
+                    internal(format!("couldn't prepare build directories \
+                                      for `{}`", pkg.get_name()))
+                }));
+            }
+            None => {}
+        }
 
         let targets = pkg.get_targets().iter();
         for target in targets.filter(|t| t.get_profile().is_compile()) {
@@ -114,10 +108,6 @@ impl<'a, 'b> Context<'a, 'b> {
         Ok(())
     }
 
-    fn mk_target(&self, target: &Path) -> Result<(), IoError> {
-        io::fs::mkdir_recursive(target, io::UserRWX)
-    }
-
     fn build_requirements(&mut self, pkg: &'a Package, target: &'a Target,
                           req: PlatformRequirement,
                           visiting: &mut HashSet<&'a PackageId>) {
@@ -148,20 +138,16 @@ impl<'a, 'b> Context<'a, 'b> {
         self.primary = true;
     }
 
-    /// Return the destination directory for output.
-    pub fn dest<'a>(&'a self, plugin: bool) -> &'a Path {
-        if self.primary {
-            if plugin {&self.host_dest} else {&self.dest}
+    /// Returns the appropriate directory layout for either a plugin or not.
+    pub fn layout<'a>(&'a self, plugin: bool) -> LayoutProxy<'a> {
+        if plugin {
+            LayoutProxy::new(&self.host, self.primary)
         } else {
-            self.deps_dir(plugin)
+            LayoutProxy::new(self.target.as_ref().unwrap_or(&self.host),
+                             self.primary)
         }
     }
 
-    /// Return the destination directory for dependencies.
-    pub fn deps_dir<'a>(&'a self, plugin: bool) -> &'a Path {
-        if plugin {&self.host_deps_dir} else {&self.deps_dir}
-    }
-
     /// Return the (prefix, suffix) pair for dynamic libraries.
     ///
     /// If `plugin` is true, the pair corresponds to the host platform,
@@ -183,6 +169,9 @@ impl<'a, 'b> Context<'a, 'b> {
         if target.is_rlib() {
             ret.push(format!("lib{}.rlib", stem));
         }
+        if target.is_bin() {
+            ret.push(stem.to_string());
+        }
         assert!(ret.len() > 0);
         return ret;
     }
@@ -200,12 +189,18 @@ impl<'a, 'b> Context<'a, 'b> {
                 .expect("Should have found package")
         })
         .filter_map(|pkg| {
-            pkg.get_targets().iter().find(|&t| {
-                t.is_lib() && t.get_profile().is_compile()
-            }).map(|t| (pkg, t))
+            pkg.get_targets().iter().find(|&t| self.is_relevant_target(t))
+               .map(|t| (pkg, t))
         })
         .collect()
     }
+
+    pub fn is_relevant_target(&self, target: &Target) -> bool {
+        target.is_lib() && match self.env {
+            "test" => target.get_profile().is_compile(),
+            _ => target.get_profile().get_env() == self.env,
+        }
+    }
 }
 
 impl PlatformRequirement {
index 26cd66512bf5951caa0398ad254a8dc805fac77e..8954706acdf4eba239f97ddc4af50a8273d7a782 100644 (file)
@@ -1,6 +1,6 @@
 use std::hash::Hasher;
 use std::hash::sip::SipHasher;
-use std::io::File;
+use std::io::{fs, File};
 
 use core::{Package, Target};
 use util;
@@ -17,20 +17,52 @@ use super::context::Context;
 /// it as the first part of the return tuple. It will then prepare a job to
 /// update the fingerprint if this package is actually rebuilt as part of
 /// compilation, returning the job as the second part of the tuple.
+///
+/// The third part of the tuple is a job to run when a package is discovered to
+/// be fresh to ensure that all of its artifacts are moved to the correct
+/// location.
 pub fn prepare(cx: &mut Context, pkg: &Package,
-               targets: &[&Target]) -> CargoResult<(Freshness, Job)> {
-    let short = short_hash(pkg.get_package_id());
-    let fingerprint_loc = cx.dest(false).join(format!(".{}.{}.fingerprint",
-                                                      pkg.get_name(),
-                                                      short));
+               targets: &[&Target]) -> CargoResult<(Freshness, Job, Job)> {
+    let filename = format!(".{}.{}.fingerprint", pkg.get_name(),
+                           short_hash(pkg.get_package_id()));
+    let filename = filename.as_slice();
+    let (old_fingerprint_loc, new_fingerprint_loc) = {
+        let layout = cx.layout(false);
+        (layout.old_root().join(filename), layout.root().join(filename))
+    };
 
-    let (is_fresh, fingerprint) = try!(is_fresh(pkg, &fingerprint_loc,
+    // First, figure out if the old location exists, and if it does whether it's
+    // still fresh or not.
+    let (is_fresh, fingerprint) = try!(is_fresh(pkg, &old_fingerprint_loc,
                                                 cx, targets));
+
+    // Prepare a job to update the location of the new fingerprint.
+    let new_fingerprint_loc2 = new_fingerprint_loc.clone();
     let write_fingerprint = Job::new(proc() {
-        try!(File::create(&fingerprint_loc).write_str(fingerprint.as_slice()));
+        let mut f = try!(File::create(&new_fingerprint_loc2));
+        try!(f.write_str(fingerprint.as_slice()));
         Ok(Vec::new())
     });
-    Ok((if is_fresh {Fresh} else {Dirty}, write_fingerprint))
+
+    // Prepare a job to copy over all old artifacts into their new destination.
+    let mut pairs = Vec::new();
+    pairs.push((old_fingerprint_loc, new_fingerprint_loc));
+    for &target in targets.iter() {
+        for filename in cx.target_filenames(target).iter() {
+            let filename = filename.as_slice();
+            let layout = cx.layout(target.get_profile().is_plugin());
+            pairs.push((layout.old_root().join(filename),
+                        layout.root().join(filename)));
+        }
+    }
+    let move_old = Job::new(proc() {
+        for &(ref src, ref dst) in pairs.iter() {
+            try!(fs::rename(src, dst));
+        }
+        Ok(Vec::new())
+    });
+
+    Ok((if is_fresh {Fresh} else {Dirty}, write_fingerprint, move_old))
 }
 
 fn is_fresh(dep: &Package, loc: &Path, cx: &mut Context, targets: &[&Target])
index 7488f24f0cd95892d3185045b2d9b232ef6f185a..a52a23913cb5c0decec8fd37ab55e09a98e5e6ec 100644 (file)
@@ -10,7 +10,7 @@ use super::job::Job;
 
 pub struct JobQueue<'a, 'b> {
     pool: TaskPool,
-    queue: DependencyQueue<'a, (&'a Package, Job)>,
+    queue: DependencyQueue<'a, (&'a Package, (Job, Job))>,
     tx: Sender<Message>,
     rx: Receiver<Message>,
     active: HashMap<&'a PackageId, uint>,
@@ -22,7 +22,8 @@ type Message = (PackageId, Freshness, CargoResult<Vec<Job>>);
 impl<'a, 'b> JobQueue<'a, 'b> {
     pub fn new(config: &'b mut Config<'b>,
                resolve: &'a Resolve,
-               jobs: Vec<(&'a Package, Freshness, Job)>) -> JobQueue<'a, 'b> {
+               jobs: Vec<(&'a Package, Freshness, (Job, Job))>)
+               -> JobQueue<'a, 'b> {
         let (tx, rx) = channel();
         let mut queue = DependencyQueue::new();
         for &(pkg, _, _) in jobs.iter() {
@@ -56,17 +57,18 @@ impl<'a, 'b> JobQueue<'a, 'b> {
         while self.queue.len() > 0 {
             loop {
                 match self.queue.dequeue() {
-                    Some((id, Fresh, (pkg, _))) => {
+                    Some((id, Fresh, (pkg, (_, fresh)))) => {
                         assert!(self.active.insert(id, 1u));
                         try!(self.config.shell().status("Fresh", pkg));
                         self.tx.send((id.clone(), Fresh, Ok(Vec::new())));
+                        try!(fresh.run());
                     }
-                    Some((id, Dirty, (pkg, job))) => {
+                    Some((id, Dirty, (pkg, (dirty, _)))) => {
                         assert!(self.active.insert(id, 1));
                         try!(self.config.shell().status("Compiling", pkg));
                         let my_tx = self.tx.clone();
                         let id = id.clone();
-                        self.pool.execute(proc() my_tx.send((id, Dirty, job.run())));
+                        self.pool.execute(proc() my_tx.send((id, Dirty, dirty.run())));
                     }
                     None => break,
                 }
diff --git a/src/cargo/ops/cargo_rustc/layout.rs b/src/cargo/ops/cargo_rustc/layout.rs
new file mode 100644 (file)
index 0000000..47c85ff
--- /dev/null
@@ -0,0 +1,109 @@
+//! Management of the directory layout of a build
+//!
+//! The directory layout is a little tricky at times, hence a separate file to
+//! house this logic. The current layout looks like this:
+//!
+//!     # This is the root directory for all output, the top-level package
+//!     # places all of its output here.
+//!     target/
+//!
+//!         # This is the root directory for all output of *dependencies*
+//!         deps/
+//!
+//!         # This is a temporary directory as part of the build process. When a
+//!         # build starts, it initially moves the old `deps` directory to this
+//!         # location. This is done to ensure that there are no stale artifacts
+//!         # lying around in the build directory which may cause a build to
+//!         # succeed where it would fail elsewhere.
+//!         #
+//!         # If a package is determined to be fresh, its files are moved out of
+//!         # this directory and back into `deps`.
+//!         old-deps/
+//!
+//!         # Similar to old-deps, this is where all of the output under
+//!         # `target/` is moved at the start of a build.
+//!         old-root/
+
+use std::io;
+use std::io::{fs, IoResult};
+
+pub struct Layout {
+    root: Path,
+    deps: Path,
+
+    old_deps: Path,
+    old_root: Path,
+}
+
+pub struct LayoutProxy<'a> {
+    root: &'a Layout,
+    primary: bool,
+}
+
+impl Layout {
+    pub fn new(root: Path) -> Layout {
+        Layout {
+            deps: root.join("deps"),
+            old_deps: root.join("old-deps"),
+            old_root: root.join("old-root"),
+            root: root,
+        }
+    }
+
+    pub fn prepare(&mut self) -> IoResult<()> {
+        if !self.root.exists() {
+            try!(fs::mkdir_recursive(&self.root, io::UserRWX));
+        }
+
+        if self.old_deps.exists() {
+            try!(fs::rmdir_recursive(&self.old_deps));
+        }
+        if self.old_root.exists() {
+            try!(fs::rmdir_recursive(&self.old_root));
+        }
+        if self.deps.exists() {
+            try!(fs::rename(&self.deps, &self.old_deps));
+        }
+
+        try!(fs::mkdir(&self.deps, io::UserRWX));
+        try!(fs::mkdir(&self.old_root, io::UserRWX));
+
+        for file in try!(fs::readdir(&self.root)).iter() {
+            if !file.is_file() { continue }
+
+            try!(fs::rename(file, &self.old_root.join(file.filename().unwrap())));
+        }
+
+        Ok(())
+    }
+
+    pub fn dest<'a>(&'a self) -> &'a Path { &self.root }
+    pub fn deps<'a>(&'a self) -> &'a Path { &self.deps }
+    pub fn old_dest<'a>(&'a self) -> &'a Path { &self.old_root }
+    pub fn old_deps<'a>(&'a self) -> &'a Path { &self.old_deps }
+}
+
+impl Drop for Layout {
+    fn drop(&mut self) {
+        let _ = fs::rmdir_recursive(&self.old_deps);
+        let _ = fs::rmdir_recursive(&self.old_root);
+    }
+}
+
+impl<'a> LayoutProxy<'a> {
+    pub fn new(root: &'a Layout, primary: bool) -> LayoutProxy<'a> {
+        LayoutProxy {
+            root: root,
+            primary: primary,
+        }
+    }
+
+    pub fn root(&self) -> &'a Path {
+        if self.primary {self.root.dest()} else {self.root.deps()}
+    }
+    pub fn deps(&self) -> &'a Path { self.root.deps() }
+
+    pub fn old_root(&self) -> &'a Path {
+        if self.primary {self.root.old_dest()} else {self.root.old_deps()}
+    }
+}
index f37df6373131916d84bed70358cebf65a7a9017b..fa2ca9630ee3d747c19ad486e0e28efa5ed69ff7 100644 (file)
@@ -11,6 +11,7 @@ mod context;
 mod fingerprint;
 mod job;
 mod job_queue;
+mod layout;
 
 type Args = Vec<String>;
 
@@ -41,18 +42,15 @@ pub fn compile_targets<'a>(env: &str, targets: &[&'a Target], pkg: &'a Package,
 
     debug!("compile_targets; targets={}; pkg={}; deps={}", targets, pkg, deps);
 
-    let host_dir = pkg.get_absolute_target_dir()
-                        .join(uniq_target_dest(targets).unwrap_or(""));
-    let host_deps_dir = host_dir.join("deps");
-
-    let target_dir = pkg.get_absolute_target_dir()
-                        .join(config.target().unwrap_or(""))
-                        .join(uniq_target_dest(targets).unwrap_or(""));
-    let deps_target_dir = target_dir.join("deps");
+    let root = pkg.get_absolute_target_dir();
+    let dest = uniq_target_dest(targets).unwrap_or("");
+    let host_layout = layout::Layout::new(root.join(dest));
+    let target_layout = config.target().map(|target| {
+        layout::Layout::new(root.join(target).join(dest))
+    });
 
-    let mut cx = try!(Context::new(resolve, deps, config,
-                                   target_dir, deps_target_dir,
-                                   host_dir, host_deps_dir));
+    let mut cx = try!(Context::new(env, resolve, deps, config,
+                                   host_layout, target_layout));
 
     // First ensure that the destination directory exists
     try!(cx.prepare(pkg));
@@ -67,10 +65,7 @@ pub fn compile_targets<'a>(env: &str, targets: &[&'a Target], pkg: &'a Package,
 
         // Only compile lib targets for dependencies
         let targets = dep.get_targets().iter().filter(|target| {
-            target.is_lib() && match env {
-                "test" => target.get_profile().is_compile(),
-                _ => target.get_profile().get_env() == env,
-            }
+            cx.is_relevant_target(*target)
         }).collect::<Vec<&Target>>();
 
         try!(compile(targets.as_slice(), dep, &mut cx, &mut jobs));
@@ -85,7 +80,7 @@ pub fn compile_targets<'a>(env: &str, targets: &[&'a Target], pkg: &'a Package,
 
 fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package,
                    cx: &mut Context<'a, 'b>,
-                   jobs: &mut Vec<(&'a Package, Freshness, Job)>)
+                   jobs: &mut Vec<(&'a Package, Freshness, (Job, Job))>)
                    -> CargoResult<()> {
     debug!("compile_pkg; pkg={}; targets={}", pkg, targets);
 
@@ -126,7 +121,7 @@ fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package,
     // TODO: Can a fingerprint be per-target instead of per-package? Doing so
     //       would likely involve altering the granularity of key for the
     //       dependency queue that is later used to run jobs.
-    let (freshness, write_fingerprint) =
+    let (freshness, write_fingerprint, copy_old) =
         try!(fingerprint::prepare(cx, pkg, targets));
 
     // Note that we build the job backwards because each job will produce more
@@ -135,7 +130,7 @@ fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package,
     let build_libs = Job::all(libs, bins);
     let job = Job::all(build_cmds, vec![build_libs]);
 
-    jobs.push((pkg, freshness, job));
+    jobs.push((pkg, freshness, (job, copy_old)));
     Ok(())
 }
 
@@ -145,12 +140,13 @@ fn compile_custom(pkg: &Package, cmd: &str,
     let mut cmd = cmd.split(' ');
     // TODO: this shouldn't explicitly pass `false` for dest/deps_dir, we may
     //       be building a C lib for a plugin
+    let layout = cx.layout(false);
     let mut p = util::process(cmd.next().unwrap())
                      .cwd(pkg.get_root())
-                     .env("OUT_DIR", Some(cx.dest(false).as_str()
-                                            .expect("non-UTF8 dest path")))
-                     .env("DEPS_DIR", Some(cx.deps_dir(false).as_str()
-                                             .expect("non-UTF8 deps path")))
+                     .env("OUT_DIR", Some(layout.root().as_str()
+                                                .expect("non-UTF8 dest path")))
+                     .env("DEPS_DIR", Some(layout.deps().as_str()
+                                                 .expect("non-UTF8 deps path")))
                      .env("TARGET", cx.config.target());
     for arg in cmd {
         p = p.arg(arg);
@@ -166,10 +162,8 @@ fn rustc(package: &Package, target: &Target,
     let crate_types = target.rustc_crate_types();
     let root = package.get_root();
 
-    log!(5, "root={}; target={}; crate_types={}; dest={}; deps={}; \
-             verbose={}; req={}",
-         root.display(), target, crate_types, cx.dest(false).display(),
-         cx.deps_dir(false).display(), cx.primary, req);
+    log!(5, "root={}; target={}; crate_types={}; verbose={}; req={}",
+         root.display(), target, crate_types, cx.primary, req);
 
     let primary = cx.primary;
     let rustcs = prepare_rustc(package, target, crate_types, cx, req);
@@ -274,7 +268,7 @@ fn build_base_args(into: &mut Args,
     }
 
     into.push("--out-dir".to_string());
-    into.push(cx.dest(plugin).display().to_string());
+    into.push(cx.layout(plugin).root().display().to_string());
 
     if !plugin {
         fn opt(into: &mut Vec<String>, key: &str, prefix: &str,
@@ -296,17 +290,19 @@ fn build_base_args(into: &mut Args,
 
 fn build_deps_args(dst: &mut Args, package: &Package, cx: &Context,
                    plugin: bool) {
+    let layout = cx.layout(plugin);
     dst.push("-L".to_string());
-    dst.push(cx.dest(plugin).display().to_string());
+    dst.push(layout.root().display().to_string());
     dst.push("-L".to_string());
-    dst.push(cx.deps_dir(plugin).display().to_string());
+    dst.push(layout.deps().display().to_string());
 
     for &(_, target) in cx.dep_targets(package).iter() {
+        let layout = cx.layout(target.get_profile().is_plugin());
         for filename in cx.target_filenames(target).iter() {
             dst.push("--extern".to_string());
             dst.push(format!("{}={}/{}",
                      target.get_name(),
-                     cx.deps_dir(target.get_profile().is_plugin()).display(),
+                     layout.deps().display(),
                      filename));
         }
     }
index ae9ebf2f8eeab98c19b19b9fb71cabf919d3ffb9..fc68611a3586cc16fcdec84dce278cfb3a24c7ca 100644 (file)
@@ -1225,3 +1225,36 @@ test!(inferred_main_bin {
     assert_that(p.cargo_process("cargo-build"), execs().with_status(0));
     assert_that(process(p.bin("foo")), execs().with_status(0));
 })
+
+test!(deletion_causes_failure {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#)
+        .file("src/main.rs", r#"
+            extern crate bar;
+            fn main() {}
+        "#)
+        .file("bar/Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("bar/src/lib.rs", "");
+
+    assert_that(p.cargo_process("cargo-build"), execs().with_status(0));
+    let p = p.file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#);
+    assert_that(p.cargo_process("cargo-build"), execs().with_status(101));
+})
index d721ec6444aeef3a39843f8495d38c90fa51d12a..d1d7b06c13511cb2a5a96605347ffb3af16225e9 100644 (file)
@@ -415,6 +415,7 @@ test!(no_rebuild_two_deps {
                                             COMPILING, baz.display(),
                                             COMPILING, bar.display(),
                                             COMPILING, p.root().display())));
+    assert_that(&p.bin("foo"), existing_file());
     assert_that(p.process(cargo_dir().join("cargo-build")),
                 execs().with_stdout(format!("{} baz v0.5.0 (file:{})\n\
                                              {} bar v0.5.0 (file:{})\n\
@@ -422,6 +423,7 @@ test!(no_rebuild_two_deps {
                                             FRESH, baz.display(),
                                             FRESH, bar.display(),
                                             FRESH, p.root().display())));
+    assert_that(&p.bin("foo"), existing_file());
 })
 
 test!(nested_deps_recompile {